This notebook contains:
- Importing data: reading and pre-processing a catalogue
- Importing a source model and selecting the sub-catalogue for the source
- Defining the seismicity model for the source
- Calculating hazard curves
- Plotting hazard curves
In [ ]:
import numpy as np
from matplotlib import pyplot
from mpl_toolkits.basemap import Basemap

def get_map_projection(src):
    """
    Return a Mercator map projection covering the source.
    """
    # extract the rupture enclosing polygon (considering a buffer of 10 km)
    rup_poly = src.get_rupture_enclosing_polygon(10.)
    min_lon = np.min(rup_poly.lons)
    max_lon = np.max(rup_poly.lons)
    min_lat = np.min(rup_poly.lats)
    max_lat = np.max(rup_poly.lats)
    # create the map projection spanning the polygon
    m = Basemap(projection='merc', llcrnrlat=min_lat, urcrnrlat=max_lat,
                llcrnrlon=min_lon, urcrnrlon=max_lon, resolution='l')
    return min_lon, max_lon, min_lat, max_lat, m
In [ ]:
from hmtk.parsers.catalogue import CsvCatalogueParser
from hmtk.parsers.source_model.nrml04_parser import nrmlSourceModelParser
# Import the catalogue
catalogue_filename = 'input_data/catalogues/catalogue_example.csv'
parser = CsvCatalogueParser(catalogue_filename)
catalogue = parser.read_file()
# Reading area source
source_model_file1 = 'input_data/source_models/area_source_minimal.xml'
parser = nrmlSourceModelParser(source_model_file1)
source_model = parser.read_file()
In [ ]:
from hmtk.plotting.mapping import HMTKBaseMap
map_config = {'min_lon': -82., 'max_lon': -65., 'min_lat': -5, 'max_lat': 12., 'resolution':'l'}
basemap1 = HMTKBaseMap(map_config, 'Catalogue and sources')
basemap1.add_catalogue(catalogue, overlay=True)
basemap1.add_source_model(source_model, area_border='r-', border_width=2.0)
In [ ]:
from copy import deepcopy
from hmtk.seismicity.declusterer.dec_gardner_knopoff import GardnerKnopoffType1
from hmtk.seismicity.declusterer.distance_time_windows import UhrhammerWindow
from hmtk.seismicity.occurrence.weichert import Weichert
from hmtk.seismicity.max_magnitude.cumulative_moment_release import CumulativeMoment
# Decluster the catalogue using the Gardner-Knopoff algorithm with the Uhrhammer time-distance window
declustering_gk = GardnerKnopoffType1()
# fs_time_prop: foreshock time window as a proportion of the aftershock window
declust_config_gk = {'time_distance_window': UhrhammerWindow(), 'fs_time_prop': 1.0}
cluster_index_gk, cluster_flag_gk = declustering_gk.decluster(catalogue, declust_config_gk)
# Purge the catalogue, keeping only the mainshocks (cluster flag == 0)
catalogue_dec = deepcopy(catalogue)
mainshock_flag = cluster_flag_gk == 0
catalogue_dec.purge_catalogue(mainshock_flag)
# Completeness table: each row gives [year since which the catalogue is complete, minimum magnitude]
completeness_table = np.array([[1995., 5.],
                               [1983., 6.],
                               [1970., 7.]])
In [ ]:
from hmtk.seismicity.selector import CatalogueSelector
# Selecting subcatalogue
area_source = source_model.sources[0]
selector = CatalogueSelector(catalogue)
area_source.select_catalogue(selector)
print 'Number of events in the area source', area_source.catalogue.get_number_events()
In [ ]:
map_config = {'min_lon': -82., 'max_lon': -65., 'min_lat': -5, 'max_lat': 12., 'resolution':'l'}
basemap2 = HMTKBaseMap(map_config, 'Subcatalogue based on area source')
basemap2.add_catalogue(area_source.catalogue, overlay=True)
basemap2.add_source_model(source_model, area_border='r-', border_width=2.0)
In [ ]:
from hmtk.plotting.seismicity.catalogue_plots import (plot_depth_histogram,
plot_observed_recurrence,
plot_magnitude_time_density)
magnitude_bin = 0.1
time_bin = 1.0
depth_bin = 20.0
# Look at the magnitude time density
plot_magnitude_time_density(area_source.catalogue, magnitude_bin, time_bin)
# Look at the observed recurrence (using the completeness table defined above)
plot_observed_recurrence(area_source.catalogue, completeness_table, magnitude_bin)
# Look at the depth histogram
plot_depth_histogram(area_source.catalogue, depth_bin, normalisation=True)
OQ-hazardlib describes the aleatory variability in rupture orientation and hypocentral depth using probability mass functions (PMFs). A PMF assigns a probability to each nodal plane and to each hypocentral depth, so both characteristics are treated as discrete random variables.
http://docs.openquake.org/oq-hazardlib/stable/pmf.html?highlight=pmf
In both cases a probability must be associated with each value:
- Nodal Plane: PMF([(1, NodalPlane(strike=90., dip=45., rake=0.))])
In this case, there is a probability of 1 that the rupture occurs on a plane with strike 90 degrees, dip 45 degrees and a strike-slip focal mechanism.
- Hypocenter depth: PMF([(1, 25.)])
In this case, there is a probability of 1 that the rupture occurs at 25 km depth.
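As a brief illustration (not part of the original workflow), a PMF may also hold several values, in which case the assigned probabilities must sum to 1:
In [ ]:
from openquake.hazardlib.pmf import PMF
# Illustrative hypocentral depth PMF: 70 % weight at 10 km depth, 30 % at 25 km.
# The probabilities passed to PMF must sum to 1.0.
example_hdd = PMF([(0.7, 10.0), (0.3, 25.0)])
# The (probability, value) pairs are stored in the .data attribute
for prob, depth in example_hdd.data:
    print "Probability = %.2f, Depth = %.1f km" % (prob, depth)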
For the hypocentral depth distribution, we can use the toolkit to define the probability mass function directly from the distribution of hypocentral depths
In [ ]:
# Histogram bin width
depth_bin_width = 20.0
# Set up histogram bins
depth_bins = np.arange(area_source.upper_depth, # Upper seismogenic depth
area_source.lower_depth + depth_bin_width, # Lower seismogenic depth (plus one more increment)
depth_bin_width)
# The catalogue has the method .get_depth_pmf, which will look at the hypocentral
# depth distribution and define a PMF from it
hdd_pmf = area_source.catalogue.get_depth_pmf(depth_bins)
# Look at the probabilities
for prob, depth in hdd_pmf.data:
print "Probability = %.3f, Depth = %.1f" % (prob, depth)
# Assign the PMF to the source
area_source.hypo_depth_dist = hdd_pmf
At the moment there are no tools for deriving the nodal plane distribution from the catalogue, so we define two nodal planes with equal probability:
1) Strike = 45.0, Dip = 90.0, Rake = 0.0
2) Strike = 135.0, Dip = 90.0, Rake = 0.0
In [ ]:
# Import the NodalPlane and the PMF objects from the hazardlib
from openquake.hazardlib.geo.nodalplane import NodalPlane
from openquake.hazardlib.pmf import PMF
# Define the two nodal planes
nodal_plane1 = NodalPlane(strike=45.0, dip=90.0, rake=0.0)
nodal_plane2 = NodalPlane(strike=135.0, dip=90.0, rake=0.0)
# Build a PMF with equal weights and assign it to the area source
area_source.nodal_plane_dist = PMF([(0.5, nodal_plane1),
                                    (0.5, nodal_plane2)])
In [ ]:
from hmtk.seismicity.occurrence.weichert import Weichert
from hmtk.seismicity.occurrence.b_maximum_likelihood import BMaxLikelihood
# Weichert method
recurrence1 = Weichert()
weichert_config = {'magnitude_interval': 0.1, 'bvalue': 1., 'itstab': 1E-5, 'maxiter': 1000}
bval1, sigmab1, aval1, sigmaa1 = recurrence1.calculate(area_source.catalogue,
                                                       weichert_config,
                                                       completeness=completeness_table)
print "--- Weichert ---"
print "a-value = %.4f +/- %.4f, b-value = %.4f +/- %.4f" % (aval1, sigmaa1, bval1, sigmab1)
# Weighted maximum likelihood method
recurrence2 = BMaxLikelihood()
wml_config = {'magnitude_interval': 0.1, 'Average Type': 'Weighted'}
bval2, sigmab2, aval2, sigmaa2 = recurrence2.calculate(area_source.catalogue,
                                                       wml_config,
                                                       completeness=completeness_table)
print "--- Weighted Maximum Likelihood ---"
print "a-value = %.4f +/- %.4f, b-value = %.4f +/- %.4f" % (aval2, sigmaa2, bval2, sigmab2)
In [ ]:
# Estimate Mmax with the cumulative moment release method (500 bootstrap samples)
cm_config = {'number_bootstraps': 500}
cmmax_estimator = CumulativeMoment()
cmmax, cmmax_uncertainty = cmmax_estimator.get_mmax(catalogue_dec, cm_config)
print "Maximum Magnitude (estimate): %.3f +/- %.3f" % (cmmax, cmmax_uncertainty)
In [ ]:
from hmtk.sources.source_model import mtkSourceModel
from openquake.hazardlib.mfd.truncated_gr import TruncatedGRMFD
# Set the minimum magnitude to 5.0
minimum_mag = 5.0
# Set the width of discretisation of the MFD to 0.2
mfd_bin_width = 0.2
# Build source model 1 - using the Weichert a- and b-values
area_source1 = deepcopy(area_source)
area_source1.mfd = TruncatedGRMFD(min_mag=minimum_mag,      # Minimum magnitude
                                  max_mag=cmmax,            # Maximum magnitude
                                  bin_width=mfd_bin_width,  # Bin width for discretisation of the MFD
                                  a_val=aval1,              # a-value
                                  b_val=bval1)              # b-value
source_model1 = mtkSourceModel("001",                       # Source model ID
                               "Area Source Weichert",      # Source model name
                               sources=[area_source1])      # List of sources
# Build source model 2 - using the weighted MLE a- and b-values
area_source2 = deepcopy(area_source)
area_source2.mfd = TruncatedGRMFD(min_mag=minimum_mag,
                                  max_mag=cmmax,
                                  bin_width=mfd_bin_width,
                                  a_val=aval2,
                                  b_val=bval2)
source_model2 = mtkSourceModel("002", "Area Source Weighted MLE", sources=[area_source2])
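To sanity-check the two magnitude-frequency distributions, the incremental annual occurrence rates of each truncated Gutenberg-Richter MFD can be listed with the hazardlib method get_annual_occurrence_rates (a quick sketch, not part of the original workflow):
In [ ]:
# Print the incremental annual occurrence rates of the Weichert-based MFD
for magnitude, rate in area_source1.mfd.get_annual_occurrence_rates():
    print "M = %.2f, annual rate = %.6E" % (magnitude, rate)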
The Poissonian temporal occurrence model requires the time span (investigation time) as an input.
http://docs.openquake.org/oq-hazardlib/stable/tom.html
In the current example we use a 1-year investigation time.
In [ ]:
from openquake.hazardlib.tom import PoissonTOM
tom = PoissonTOM(1.0)
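Under the Poisson model an annual occurrence rate lambda corresponds to a probability of one or more occurrences of 1 - exp(-lambda * t) over the investigation time t. A minimal check (assuming PoissonTOM exposes the get_probability_one_or_more_occurrences method in this version of oq-hazardlib):
In [ ]:
# Example: probability of observing at least one event in 1 year,
# given an annual occurrence rate of 0.01 events per year
example_rate = 0.01
print "PoE (1 - exp(-rate * t)) = %.6f" % (1.0 - np.exp(-example_rate * 1.0))
print "PoE (PoissonTOM)         = %.6f" % tom.get_probability_one_or_more_occurrences(example_rate)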
In [ ]:
oq_source_model1 = source_model1.convert_to_oqhazardlib(tom, area_discretisation=20.0)
oq_source_model2 = source_model2.convert_to_oqhazardlib(tom, area_discretisation=20.0)
In this case we are interested in the hazard at two sites:
In [ ]:
from hmtk.hazard import HMTKHazardCurve, site_array_to_collection
# Create our sites in a table (a 2-D array) with all of the site attributes
# ID, Longitude, Latitude, Vs30, Vs30Measured, Z1.0, Z2.5
site_array = np.array([[1.0, -76.6, 6.0, 800.0, 1.0, 40.0, 1.0],
[2.0, -73.0, 0.5, 350.0, 1.0, 80.0, 1.5]])
# Create our "Site Collection" for the calculation
sites = site_array_to_collection(site_array)
In [ ]:
from mpl_toolkits.basemap import Basemap
# Plot the area source boundary and the two sites of interest on a map
fig1 = pyplot.figure(figsize=(10, 10), dpi=160)
# Build a map projection enclosing the source (with a 10 km buffer)
min_lon, max_lon, min_lat, max_lat, m = get_map_projection(oq_source_model1[0])
print min_lon, max_lon, min_lat, max_lat
m.drawparallels(np.arange(np.ceil(min_lat), np.ceil(max_lat), 1.0), labels=[True, False, False, True])
m.drawmeridians(np.arange(np.ceil(min_lon), np.ceil(max_lon), 1.0), labels=[True, False, False, True])
m.drawcoastlines()
m.drawcountries()
# Plot the area source boundary
x, y = m(oq_source_model1[0].polygon.lons, oq_source_model1[0].polygon.lats)
m.plot(x, y, linewidth=2, color='black')
# Plot the two sites (projected coordinates)
x1, y1 = m(site_array[0, 1], site_array[0, 2])
x2, y2 = m(site_array[1, 1], site_array[1, 2])
m.plot(x1, y1, 'Db', label='Site 1', markersize=10.)
m.plot(x2, y2, 'sr', label='Site 2', markersize=10.)
pyplot.legend()
In [ ]:
# GMPE to use for each tectonic region type
gmpes = {"Active Shallow Crust": "BooreEtAl2014"}
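The GMPE is referred to by the class name it has in oq-hazardlib. To check which names are available in the installed version (a quick sketch using hazardlib's get_available_gsims):
In [ ]:
from openquake.hazardlib import gsim
# Dictionary of all ground motion models registered in this version of hazardlib
available_gsims = gsim.get_available_gsims()
print "Number of available GSIMs: %d" % len(available_gsims)
print "BooreEtAl2014 available: %s" % ("BooreEtAl2014" in available_gsims)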
In [ ]:
# Intensity measure levels
imls = [0.001, 0.005, 0.007, 0.0098, 0.0137, 0.0192, 0.0269, 0.0376, 0.0527, 0.0738, 0.103, 0.145,
0.203, 0.284, 0.397, 0.556, 0.778, 1.09, 1.52, 2.13]
# Run hazard curves for source model 1 (Weichert)
curve_builder1 = HMTKHazardCurve(oq_source_model1,    # Source model
                                 sites,               # Site model
                                 gmpes,               # GMPE per tectonic region type
                                 [imls],              # Intensity measure levels
                                 ["PGA"],             # Intensity measure types
                                 truncation_level=3.0,            # Sigma truncation
                                 source_integration_dist=200.0,   # Maximum source-to-site distance (km)
                                 rupture_integration_dist=200.0)  # Maximum rupture-to-site distance (km)
curves1 = curve_builder1.calculate_hazard()
# View the hazard curves
print "Hazard Curve 1"
print "PGA (g)    Annual PoE - Site 1    Annual PoE - Site 2"
for iloc, iml in enumerate(imls):
    print "%.4f    %.8E    %.8E" % (iml, curves1['PGA'][0, iloc], curves1['PGA'][1, iloc])
print "======================================================"
# Run hazard curves for source model 2 (Weighted MLE)
curve_builder2 = HMTKHazardCurve(oq_source_model2,
                                 sites,
                                 gmpes,
                                 [imls],
                                 ["PGA"],
                                 truncation_level=3.0,
                                 source_integration_dist=200.0,
                                 rupture_integration_dist=200.0)
curves2 = curve_builder2.calculate_hazard()
print "Hazard Curve 2"
print "PGA (g)    Annual PoE - Site 1    Annual PoE - Site 2"
for iloc, iml in enumerate(imls):
    print "%.4f    %.8E    %.8E" % (iml, curves2['PGA'][0, iloc], curves2['PGA'][1, iloc])
print "======================================================"
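The curves above give annual probabilities of exceedance. Under the Poisson assumption these can be converted to the probability of exceedance over any investigation time T as 1 - (1 - PoE_annual)^T; a minimal sketch for a 50-year window at Site 1 (assumes curves1['PGA'] is a numpy array, as used in the plotting cells below):
In [ ]:
# Convert the annual PoEs at Site 1 to 50-year PoEs under the Poisson assumption
investigation_time = 50.0
curves1_50yr = 1.0 - (1.0 - curves1['PGA'][0, :]) ** investigation_time
for iloc, iml in enumerate(imls):
    print "PGA = %.4f g, 50-year PoE = %.6f" % (iml, curves1_50yr[iloc])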
In [ ]:
fig = pyplot.figure(figsize=(9, 9))
pyplot.loglog(imls, curves1["PGA"][0, :], '-b', linewidth=2, label='Weichert')
pyplot.loglog(imls, curves2["PGA"][0, :], '-r', linewidth=2, label='Weighted MLE')
pyplot.legend(loc="upper left", bbox_to_anchor=(1, 1))
pyplot.xlim([5E-3, 2.0])
pyplot.ylim([1E-7, 1])
pyplot.xlabel('PGA (g)', fontsize=20)
pyplot.ylabel('Annual probability of exceedance', fontsize=20)
pyplot.title("Hazard Curves for Site 1", fontsize=20)
In [ ]:
fig = pyplot.figure(figsize=(9, 9))
pyplot.loglog(imls, curves1["PGA"][1, :], '-b', linewidth=2, label='Weichert')
pyplot.loglog(imls, curves2["PGA"][1, :], '-r', linewidth=2, label='Weighted MLE')
pyplot.legend(loc="upper left", bbox_to_anchor=(1, 1))
pyplot.xlim([5E-3, 2.0])
pyplot.ylim([1E-7, 1])
pyplot.xlabel('PGA (g)', fontsize=20)
pyplot.ylabel('Annual probability of exceedance', fontsize=20)
pyplot.title("Hazard Curves for Site 2", fontsize=20)